Classification and detection of cars in drone images

We start by installing and importing the necessary libraries:

In [ ]:
!pip install rasterio
Collecting rasterio
  Downloading rasterio-1.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (9.1 kB)
Collecting affine (from rasterio)
  Downloading affine-2.4.0-py3-none-any.whl.metadata (4.0 kB)
Requirement already satisfied: attrs in /usr/local/lib/python3.10/dist-packages (from rasterio) (24.3.0)
Requirement already satisfied: certifi in /usr/local/lib/python3.10/dist-packages (from rasterio) (2024.12.14)
Requirement already satisfied: click>=4.0 in /usr/local/lib/python3.10/dist-packages (from rasterio) (8.1.8)
Collecting cligj>=0.5 (from rasterio)
  Downloading cligj-0.7.2-py3-none-any.whl.metadata (5.0 kB)
Requirement already satisfied: numpy>=1.24 in /usr/local/lib/python3.10/dist-packages (from rasterio) (1.26.4)
Collecting click-plugins (from rasterio)
  Downloading click_plugins-1.1.1-py2.py3-none-any.whl.metadata (6.4 kB)
Requirement already satisfied: pyparsing in /usr/local/lib/python3.10/dist-packages (from rasterio) (3.2.1)
Downloading rasterio-1.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (22.2 MB)
   ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 22.2/22.2 MB 46.2 MB/s eta 0:00:00
Downloading cligj-0.7.2-py3-none-any.whl (7.1 kB)
Downloading affine-2.4.0-py3-none-any.whl (15 kB)
Downloading click_plugins-1.1.1-py2.py3-none-any.whl (7.5 kB)
Installing collected packages: cligj, click-plugins, affine, rasterio
Successfully installed affine-2.4.0 click-plugins-1.1.1 cligj-0.7.2 rasterio-1.4.3
In [ ]:
from google.colab import drive
drive.mount('/content/drive')
Mounted at /content/drive
In [ ]:
import rasterio
import numpy as np
from matplotlib import pyplot as plt
import cv2
from shapely.geometry import shape
import geopandas as gpd
import shapely
from rasterio.plot import show
from sklearn.model_selection import train_test_split
import pandas as pd

We define the image path and import it, converting it into a numpy array, to then plot:

In [ ]:
# Hardcoded Google Drive path (Colab-mounted). Adjust if the dataset lives elsewhere.
path = 'drive/My Drive/Datasets/Car_detection/L1_car_detect.tif'
In [ ]:
# Open the GeoTIFF and read all bands into a (bands, rows, cols) array.
# NOTE: `src` is deliberately left open — it is reused later for plotting
# (rasterio.plot.show) and for coordinate-to-pixel lookups (src.index).
src = rasterio.open(path)
im = src.read()
In [ ]:
# rasterio returns (bands, rows, cols); reorder to (rows, cols, bands)
# so matplotlib and OpenCV can consume the array directly.
im = im.transpose([1,2,0])
In [ ]:
im.shape
Out[ ]:
(12715, 12104, 4)
In [ ]:
# Cast to uint8 for display and OpenCV operations.
# NOTE(review): astype truncates rather than rescales — if the source raster
# is not already 0-255 valued (e.g. uint16 or float bands), values will
# wrap/clip incorrectly; confirm the raster's dtype and range.
im = im.astype('uint8')
In [ ]:
# Display the full mosaic; axes hidden, large figure size to keep detail visible.
plt.figure(figsize=[16,16])
plt.imshow(im)
plt.axis('off')
Out[ ]:
(-0.5, 12103.5, 12714.5, -0.5)
No description has been provided for this image

Let's now import our reference data. The drone image was opened in QGIS and two shapefiles were created: one with car location points, used to create the positive samples, and one with location points of other objects in the image, used to create the negative samples. Let's import these two files and plot the points on the image — cars in red, non-cars in blue.

In [ ]:
# Reference data digitised in QGIS: point locations of cars (positive class)
# and of other objects in the scene (negative class).
Cars = gpd.read_file('drive/My Drive/Datasets/Car_detection/Cars.shp')
No_Cars = gpd.read_file('drive/My Drive/Datasets/Car_detection/No_Cars.shp')
In [ ]:
# Overlay the reference points on the raster: cars in red, non-cars in blue.
fig, ax = plt.subplots(figsize=(20, 20))
show((src), ax=ax)
Cars.plot(ax=ax,  marker='o', color='red', markersize=15)
No_Cars.plot(ax=ax,  marker='o', color='blue', markersize=15)
Out[ ]:
<Axes: >
No description has been provided for this image

The next step is to extract an image patch for each point. Using rasterio and numpy, we will take a 40-pixel buffer around each point and crop it, producing an 80x80x3 image. We will also apply some data augmentation operations, such as rotations and flips.

In [ ]:
# Build the positive (car) sample set: one 80x80 RGB patch per reference
# point, plus five augmented variants of each patch.
Car_img_list = []
for car_point in Cars['geometry']:
  # Convert the point's map coordinates to pixel (row, col) indices.
  x = car_point.xy[0][0]
  y = car_point.xy[1][0]
  row, col = src.index(x,y)

  # Crop an 80x80 RGB patch centred on the point (40-pixel buffer).
  patch = im[row-40:row+40, col-40:col+40, 0:3]

  # Points closer than 40 px to the raster border yield short slices (and a
  # negative start index yields an empty slice); skip them instead of storing
  # a malformed sample that would break np.array() below.
  if patch.shape != (80, 80, 3):
    continue

  # Data augmentation: vertical/horizontal/both flips, a 90-degree rotation,
  # and a 45-degree rotation (warpAffine pads the corners with black).
  flip_v = cv2.flip(patch, 0)
  flip_h = cv2.flip(patch, 1)
  flip_both = cv2.flip(patch, -1)
  rot90 = cv2.rotate(patch, cv2.ROTATE_90_CLOCKWISE)
  rows, cols, _ = patch.shape
  M = cv2.getRotationMatrix2D(((cols-1)/2.0, (rows-1)/2.0), 45, 1)
  rot45 = cv2.warpAffine(patch, M, (cols, rows))

  Car_img_list.extend([patch, flip_v, flip_h, flip_both, rot90, rot45])

Car_img_list = np.array(Car_img_list)
In [ ]:
# Build the negative (non-car) sample set with the same patch extraction and
# augmentation scheme as the car loop above.
No_Car_img_list = []
for No_car_point in No_Cars['geometry']:
  # Convert the point's map coordinates to pixel (row, col) indices.
  x = No_car_point.xy[0][0]
  y = No_car_point.xy[1][0]
  row, col = src.index(x,y)

  # Crop an 80x80 RGB patch centred on the point (40-pixel buffer).
  patch = im[row-40:row+40, col-40:col+40, 0:3]

  # Points closer than 40 px to the raster border yield short slices (and a
  # negative start index yields an empty slice); skip them instead of storing
  # a malformed sample that would break np.array() below.
  if patch.shape != (80, 80, 3):
    continue

  # Data augmentation: vertical/horizontal/both flips, a 90-degree rotation,
  # and a 45-degree rotation (warpAffine pads the corners with black).
  flip_v = cv2.flip(patch, 0)
  flip_h = cv2.flip(patch, 1)
  flip_both = cv2.flip(patch, -1)
  rot90 = cv2.rotate(patch, cv2.ROTATE_90_CLOCKWISE)
  rows, cols, _ = patch.shape
  M = cv2.getRotationMatrix2D(((cols-1)/2.0, (rows-1)/2.0), 45, 1)
  rot45 = cv2.warpAffine(patch, M, (cols, rows))

  No_Car_img_list.extend([patch, flip_v, flip_h, flip_both, rot90, rot45])

No_Car_img_list = np.array(No_Car_img_list)
In [ ]:
Car_img_list.shape
Out[ ]:
(852, 80, 80, 3)

Let's plot an example of a car image and a non-car image:

In [ ]:
# Example positive sample (a car patch).
plt.figure(figsize=[4,4])
plt.imshow(Car_img_list[6])
Out[ ]:
<matplotlib.image.AxesImage at 0x7f8aca8de6d0>
No description has been provided for this image
In [ ]:
# Example negative sample (a non-car patch).
plt.figure(figsize=[4,4])
plt.imshow(No_Car_img_list[0])
Out[ ]:
<matplotlib.image.AxesImage at 0x7f8aca8a4250>
No description has been provided for this image

We can now create reference Y vectors with values of 0 for non-car and 1 for car. Then we normalize the values of the image dataset to the range of 0 to 1.

In [ ]:
# Binary labels: 1.0 for every car patch, 0.0 for every non-car patch.
Y_car = np.full(len(Car_img_list), 1.0)
Y_No_car = np.full(len(No_Car_img_list), 0.0)
In [ ]:
# Car_img_list / No_Car_img_list were already converted to numpy arrays right
# after the extraction loops, so use np.asarray (a no-op for ndarrays) rather
# than np.array, which would make a second full copy of the image data.
X_car = np.asarray(Car_img_list)
X_No_car = np.asarray(No_Car_img_list)
In [ ]:
# Stack positives and negatives into a single dataset (X) with matching
# labels (Y); row order is cars first, then non-cars.
X = np.concatenate((X_car,X_No_car), axis=0)
Y = np.concatenate((Y_car,Y_No_car), axis=0)
In [ ]:
# Scale pixel values from [0, 255] to [0, 1]. Casting to float32 first halves
# the memory footprint versus the float64 produced by plain integer division
# and matches Keras' default compute dtype.
X = X.astype('float32') / 255

The next step is to divide the data into training and testing.

In [ ]:
# 70/30 train/test split with a fixed seed for reproducibility.
# stratify=Y keeps the car / non-car ratio identical in both subsets.
# NOTE(review): augmentation was applied BEFORE this split, so flipped/rotated
# copies of the same source patch can land in both train and test — the test
# accuracy is therefore optimistic. For an unbiased estimate, split the point
# datasets first and augment only the training portion.
x_train, x_test, y_train, y_test = train_test_split(
    X, Y, test_size=0.3, random_state=42, stratify=Y)

And then the creation of the custom convolutional network for image classification:

In [ ]:
from keras.models import Model, Sequential
from keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense, BatchNormalization, Dropout, Activation
#from tensorflow.keras.optimizers import Adam
from keras.optimizers import Adam
from keras.activations import relu
from keras.losses import binary_crossentropy
from keras import backend as K
import tensorflow as tf
from keras.models import load_model
In [ ]:
# NOTE(review): `Model = Sequential()` shadows the `Model` class imported from
# keras.models above. It works here only because the class is never used
# afterwards; rename the variable (e.g. `model`) together with all later uses.
#
# Architecture: three Conv(3x3)->ReLU->MaxPool stages (32/64/128 filters, the
# first two followed by BatchNorm + Dropout), then Flatten, two dense layers
# (128 and 32 units) and a single sigmoid unit for binary car / non-car output.
Model = Sequential()
Model.add(Conv2D(32,(3,3), kernel_initializer="he_normal", padding='same', input_shape=(x_train.shape[1:])))
Model.add(Activation('relu'))
Model.add(MaxPooling2D(pool_size=(2,2),strides=2))
Model.add(BatchNormalization())
Model.add(Dropout(0.25))
Model.add(Conv2D(64,(3,3), kernel_initializer="he_normal", padding='same'))
Model.add(Activation('relu'))
Model.add(MaxPooling2D(pool_size=(2,2),strides=2))
Model.add(BatchNormalization())
Model.add(Dropout(0.25))
Model.add(Conv2D(128,(3,3), kernel_initializer="he_normal", padding='same'))
Model.add(Activation('relu'))
Model.add(MaxPooling2D(pool_size=(2,2),strides=2))
Model.add(Flatten())
Model.add(Dense(128))
Model.add(Activation('relu'))
Model.add(Dropout(0.25))
Model.add(Dense(32))
Model.add(Activation('relu'))
Model.add(Dense(1))
Model.add(Activation('sigmoid'))
# Binary cross-entropy with Adam at a small learning rate (1e-5).
Model.compile(loss='binary_crossentropy', optimizer=Adam(learning_rate=0.00001), metrics=['accuracy'])
Model.summary()
Model: "sequential_2"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d_3 (Conv2D)           (None, 80, 80, 32)        896       
                                                                 
 activation_6 (Activation)   (None, 80, 80, 32)        0         
                                                                 
 max_pooling2d_3 (MaxPooling  (None, 40, 40, 32)       0         
 2D)                                                             
                                                                 
 batch_normalization_2 (Batc  (None, 40, 40, 32)       128       
 hNormalization)                                                 
                                                                 
 dropout_3 (Dropout)         (None, 40, 40, 32)        0         
                                                                 
 conv2d_4 (Conv2D)           (None, 40, 40, 64)        18496     
                                                                 
 activation_7 (Activation)   (None, 40, 40, 64)        0         
                                                                 
 max_pooling2d_4 (MaxPooling  (None, 20, 20, 64)       0         
 2D)                                                             
                                                                 
 batch_normalization_3 (Batc  (None, 20, 20, 64)       256       
 hNormalization)                                                 
                                                                 
 dropout_4 (Dropout)         (None, 20, 20, 64)        0         
                                                                 
 conv2d_5 (Conv2D)           (None, 20, 20, 128)       73856     
                                                                 
 activation_8 (Activation)   (None, 20, 20, 128)       0         
                                                                 
 max_pooling2d_5 (MaxPooling  (None, 10, 10, 128)      0         
 2D)                                                             
                                                                 
 flatten_1 (Flatten)         (None, 12800)             0         
                                                                 
 dense_3 (Dense)             (None, 128)               1638528   
                                                                 
 activation_9 (Activation)   (None, 128)               0         
                                                                 
 dropout_5 (Dropout)         (None, 128)               0         
                                                                 
 dense_4 (Dense)             (None, 32)                4128      
                                                                 
 activation_10 (Activation)  (None, 32)                0         
                                                                 
 dense_5 (Dense)             (None, 1)                 33        
                                                                 
 activation_11 (Activation)  (None, 1)                 0         
                                                                 
=================================================================
Total params: 1,736,321
Trainable params: 1,736,129
Non-trainable params: 192
_________________________________________________________________
/usr/local/lib/python3.7/dist-packages/keras/optimizer_v2/adam.py:105: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead.
  super(Adam, self).__init__(name, **kwargs)
In [ ]:
# Train for up to 200 epochs, holding out 20% of the training data for
# validation. NOTE(review): in the logs below, validation loss stops improving
# after roughly epoch 30 while training loss keeps falling — consider an
# EarlyStopping callback (restore_best_weights=True) instead of a fixed count.
history = Model.fit(x = x_train, y= y_train, batch_size=64, epochs=200, verbose=1, shuffle=True, validation_split=0.2)
Epoch 1/200
25/25 [==============================] - 1s 30ms/step - loss: 0.5920 - accuracy: 0.7151 - val_loss: 0.6846 - val_accuracy: 0.5882
Epoch 2/200
25/25 [==============================] - 1s 20ms/step - loss: 0.4541 - accuracy: 0.7926 - val_loss: 0.6622 - val_accuracy: 0.6113
Epoch 3/200
25/25 [==============================] - 1s 20ms/step - loss: 0.3972 - accuracy: 0.8175 - val_loss: 0.5847 - val_accuracy: 0.7954
Epoch 4/200
25/25 [==============================] - 1s 20ms/step - loss: 0.3518 - accuracy: 0.8406 - val_loss: 0.5416 - val_accuracy: 0.8312
Epoch 5/200
25/25 [==============================] - 1s 20ms/step - loss: 0.3271 - accuracy: 0.8502 - val_loss: 0.5232 - val_accuracy: 0.8261
Epoch 6/200
25/25 [==============================] - 1s 25ms/step - loss: 0.3069 - accuracy: 0.8758 - val_loss: 0.4722 - val_accuracy: 0.8593
Epoch 7/200
25/25 [==============================] - 1s 20ms/step - loss: 0.2779 - accuracy: 0.8835 - val_loss: 0.3842 - val_accuracy: 0.9003
Epoch 8/200
25/25 [==============================] - 1s 24ms/step - loss: 0.2774 - accuracy: 0.8828 - val_loss: 0.3565 - val_accuracy: 0.9054
Epoch 9/200
25/25 [==============================] - 0s 20ms/step - loss: 0.2742 - accuracy: 0.8873 - val_loss: 0.3255 - val_accuracy: 0.8875
Epoch 10/200
25/25 [==============================] - 0s 20ms/step - loss: 0.2486 - accuracy: 0.8988 - val_loss: 0.2899 - val_accuracy: 0.9079
Epoch 11/200
25/25 [==============================] - 1s 20ms/step - loss: 0.2314 - accuracy: 0.8982 - val_loss: 0.2894 - val_accuracy: 0.9105
Epoch 12/200
25/25 [==============================] - 1s 20ms/step - loss: 0.2263 - accuracy: 0.9027 - val_loss: 0.2528 - val_accuracy: 0.9130
Epoch 13/200
25/25 [==============================] - 0s 20ms/step - loss: 0.2056 - accuracy: 0.9181 - val_loss: 0.2422 - val_accuracy: 0.9003
Epoch 14/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1952 - accuracy: 0.9251 - val_loss: 0.2262 - val_accuracy: 0.9130
Epoch 15/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1828 - accuracy: 0.9353 - val_loss: 0.2187 - val_accuracy: 0.9130
Epoch 16/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1757 - accuracy: 0.9353 - val_loss: 0.2126 - val_accuracy: 0.9130
Epoch 17/200
25/25 [==============================] - 1s 20ms/step - loss: 0.1890 - accuracy: 0.9219 - val_loss: 0.2169 - val_accuracy: 0.9028
Epoch 18/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1643 - accuracy: 0.9385 - val_loss: 0.2059 - val_accuracy: 0.9105
Epoch 19/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1769 - accuracy: 0.9315 - val_loss: 0.1989 - val_accuracy: 0.9130
Epoch 20/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1676 - accuracy: 0.9366 - val_loss: 0.1867 - val_accuracy: 0.9233
Epoch 21/200
25/25 [==============================] - 1s 20ms/step - loss: 0.1629 - accuracy: 0.9385 - val_loss: 0.1844 - val_accuracy: 0.9258
Epoch 22/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1649 - accuracy: 0.9398 - val_loss: 0.1802 - val_accuracy: 0.9233
Epoch 23/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1567 - accuracy: 0.9385 - val_loss: 0.1964 - val_accuracy: 0.9028
Epoch 24/200
25/25 [==============================] - 1s 20ms/step - loss: 0.1448 - accuracy: 0.9392 - val_loss: 0.2197 - val_accuracy: 0.9054
Epoch 25/200
25/25 [==============================] - 1s 20ms/step - loss: 0.1537 - accuracy: 0.9430 - val_loss: 0.2002 - val_accuracy: 0.9105
Epoch 26/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1501 - accuracy: 0.9411 - val_loss: 0.1783 - val_accuracy: 0.9284
Epoch 27/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1321 - accuracy: 0.9545 - val_loss: 0.1838 - val_accuracy: 0.9233
Epoch 28/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1314 - accuracy: 0.9539 - val_loss: 0.1978 - val_accuracy: 0.9156
Epoch 29/200
25/25 [==============================] - 1s 20ms/step - loss: 0.1240 - accuracy: 0.9526 - val_loss: 0.1807 - val_accuracy: 0.9207
Epoch 30/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1341 - accuracy: 0.9494 - val_loss: 0.2021 - val_accuracy: 0.9105
Epoch 31/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1319 - accuracy: 0.9469 - val_loss: 0.2151 - val_accuracy: 0.8977
Epoch 32/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1162 - accuracy: 0.9533 - val_loss: 0.1729 - val_accuracy: 0.9233
Epoch 33/200
25/25 [==============================] - 1s 20ms/step - loss: 0.1250 - accuracy: 0.9526 - val_loss: 0.1830 - val_accuracy: 0.9182
Epoch 34/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1102 - accuracy: 0.9603 - val_loss: 0.1980 - val_accuracy: 0.9079
Epoch 35/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1156 - accuracy: 0.9513 - val_loss: 0.2156 - val_accuracy: 0.9028
Epoch 36/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1013 - accuracy: 0.9641 - val_loss: 0.1981 - val_accuracy: 0.9105
Epoch 37/200
25/25 [==============================] - 1s 20ms/step - loss: 0.1052 - accuracy: 0.9622 - val_loss: 0.1928 - val_accuracy: 0.9156
Epoch 38/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1058 - accuracy: 0.9609 - val_loss: 0.1908 - val_accuracy: 0.9182
Epoch 39/200
25/25 [==============================] - 0s 20ms/step - loss: 0.1012 - accuracy: 0.9616 - val_loss: 0.2025 - val_accuracy: 0.9079
Epoch 40/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0983 - accuracy: 0.9629 - val_loss: 0.2007 - val_accuracy: 0.9156
Epoch 41/200
25/25 [==============================] - 1s 20ms/step - loss: 0.1062 - accuracy: 0.9565 - val_loss: 0.2117 - val_accuracy: 0.9079
Epoch 42/200
25/25 [==============================] - 1s 20ms/step - loss: 0.1007 - accuracy: 0.9641 - val_loss: 0.1980 - val_accuracy: 0.9182
Epoch 43/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0945 - accuracy: 0.9693 - val_loss: 0.2432 - val_accuracy: 0.8926
Epoch 44/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0826 - accuracy: 0.9680 - val_loss: 0.2151 - val_accuracy: 0.9105
Epoch 45/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0846 - accuracy: 0.9680 - val_loss: 0.1924 - val_accuracy: 0.9182
Epoch 46/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0817 - accuracy: 0.9718 - val_loss: 0.1953 - val_accuracy: 0.9156
Epoch 47/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0805 - accuracy: 0.9712 - val_loss: 0.1921 - val_accuracy: 0.9130
Epoch 48/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0837 - accuracy: 0.9648 - val_loss: 0.1869 - val_accuracy: 0.9182
Epoch 49/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0936 - accuracy: 0.9603 - val_loss: 0.2425 - val_accuracy: 0.8977
Epoch 50/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0885 - accuracy: 0.9654 - val_loss: 0.2201 - val_accuracy: 0.9079
Epoch 51/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0758 - accuracy: 0.9744 - val_loss: 0.2111 - val_accuracy: 0.9105
Epoch 52/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0854 - accuracy: 0.9686 - val_loss: 0.2073 - val_accuracy: 0.9156
Epoch 53/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0753 - accuracy: 0.9770 - val_loss: 0.2158 - val_accuracy: 0.9130
Epoch 54/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0763 - accuracy: 0.9738 - val_loss: 0.2543 - val_accuracy: 0.8977
Epoch 55/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0684 - accuracy: 0.9757 - val_loss: 0.2042 - val_accuracy: 0.9156
Epoch 56/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0770 - accuracy: 0.9725 - val_loss: 0.2565 - val_accuracy: 0.8951
Epoch 57/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0717 - accuracy: 0.9776 - val_loss: 0.2422 - val_accuracy: 0.9028
Epoch 58/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0729 - accuracy: 0.9712 - val_loss: 0.2252 - val_accuracy: 0.9105
Epoch 59/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0564 - accuracy: 0.9827 - val_loss: 0.2235 - val_accuracy: 0.9054
Epoch 60/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0707 - accuracy: 0.9725 - val_loss: 0.1933 - val_accuracy: 0.9130
Epoch 61/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0731 - accuracy: 0.9750 - val_loss: 0.2055 - val_accuracy: 0.9105
Epoch 62/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0622 - accuracy: 0.9782 - val_loss: 0.1989 - val_accuracy: 0.9130
Epoch 63/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0590 - accuracy: 0.9846 - val_loss: 0.2084 - val_accuracy: 0.9130
Epoch 64/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0635 - accuracy: 0.9776 - val_loss: 0.2272 - val_accuracy: 0.9079
Epoch 65/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0596 - accuracy: 0.9827 - val_loss: 0.2162 - val_accuracy: 0.9079
Epoch 66/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0548 - accuracy: 0.9834 - val_loss: 0.2318 - val_accuracy: 0.9079
Epoch 67/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0492 - accuracy: 0.9853 - val_loss: 0.2037 - val_accuracy: 0.9079
Epoch 68/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0564 - accuracy: 0.9840 - val_loss: 0.2211 - val_accuracy: 0.9105
Epoch 69/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0551 - accuracy: 0.9808 - val_loss: 0.2462 - val_accuracy: 0.9003
Epoch 70/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0513 - accuracy: 0.9834 - val_loss: 0.2001 - val_accuracy: 0.9156
Epoch 71/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0570 - accuracy: 0.9808 - val_loss: 0.2372 - val_accuracy: 0.9105
Epoch 72/200
25/25 [==============================] - 1s 23ms/step - loss: 0.0512 - accuracy: 0.9840 - val_loss: 0.2305 - val_accuracy: 0.9079
Epoch 73/200
25/25 [==============================] - 1s 22ms/step - loss: 0.0462 - accuracy: 0.9878 - val_loss: 0.2630 - val_accuracy: 0.9028
Epoch 74/200
25/25 [==============================] - 1s 27ms/step - loss: 0.0487 - accuracy: 0.9834 - val_loss: 0.1874 - val_accuracy: 0.9182
Epoch 75/200
25/25 [==============================] - 1s 24ms/step - loss: 0.0555 - accuracy: 0.9802 - val_loss: 0.2030 - val_accuracy: 0.9105
Epoch 76/200
25/25 [==============================] - 1s 29ms/step - loss: 0.0613 - accuracy: 0.9763 - val_loss: 0.2060 - val_accuracy: 0.9156
Epoch 77/200
25/25 [==============================] - 1s 25ms/step - loss: 0.0502 - accuracy: 0.9827 - val_loss: 0.1912 - val_accuracy: 0.9182
Epoch 78/200
25/25 [==============================] - 1s 23ms/step - loss: 0.0493 - accuracy: 0.9840 - val_loss: 0.2417 - val_accuracy: 0.9079
Epoch 79/200
25/25 [==============================] - 1s 22ms/step - loss: 0.0423 - accuracy: 0.9878 - val_loss: 0.1987 - val_accuracy: 0.9182
Epoch 80/200
25/25 [==============================] - 1s 27ms/step - loss: 0.0427 - accuracy: 0.9866 - val_loss: 0.2436 - val_accuracy: 0.9028
Epoch 81/200
25/25 [==============================] - 1s 23ms/step - loss: 0.0431 - accuracy: 0.9866 - val_loss: 0.2287 - val_accuracy: 0.9182
Epoch 82/200
25/25 [==============================] - 1s 24ms/step - loss: 0.0435 - accuracy: 0.9821 - val_loss: 0.2463 - val_accuracy: 0.9054
Epoch 83/200
25/25 [==============================] - 1s 25ms/step - loss: 0.0366 - accuracy: 0.9930 - val_loss: 0.2245 - val_accuracy: 0.9105
Epoch 84/200
25/25 [==============================] - 1s 27ms/step - loss: 0.0382 - accuracy: 0.9878 - val_loss: 0.2309 - val_accuracy: 0.9079
Epoch 85/200
25/25 [==============================] - 1s 28ms/step - loss: 0.0500 - accuracy: 0.9795 - val_loss: 0.2130 - val_accuracy: 0.9105
Epoch 86/200
25/25 [==============================] - 1s 26ms/step - loss: 0.0394 - accuracy: 0.9885 - val_loss: 0.3004 - val_accuracy: 0.8951
Epoch 87/200
25/25 [==============================] - 1s 25ms/step - loss: 0.0497 - accuracy: 0.9802 - val_loss: 0.2241 - val_accuracy: 0.9182
Epoch 88/200
25/25 [==============================] - 1s 21ms/step - loss: 0.0414 - accuracy: 0.9840 - val_loss: 0.2138 - val_accuracy: 0.9130
Epoch 89/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0412 - accuracy: 0.9878 - val_loss: 0.2477 - val_accuracy: 0.9028
Epoch 90/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0425 - accuracy: 0.9853 - val_loss: 0.2369 - val_accuracy: 0.9079
Epoch 91/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0408 - accuracy: 0.9853 - val_loss: 0.2769 - val_accuracy: 0.9054
Epoch 92/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0333 - accuracy: 0.9930 - val_loss: 0.2507 - val_accuracy: 0.9079
Epoch 93/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0404 - accuracy: 0.9853 - val_loss: 0.2095 - val_accuracy: 0.9130
Epoch 94/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0295 - accuracy: 0.9930 - val_loss: 0.2165 - val_accuracy: 0.9156
Epoch 95/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0285 - accuracy: 0.9942 - val_loss: 0.2600 - val_accuracy: 0.9028
Epoch 96/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0331 - accuracy: 0.9891 - val_loss: 0.2501 - val_accuracy: 0.9079
Epoch 97/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0310 - accuracy: 0.9891 - val_loss: 0.2275 - val_accuracy: 0.9105
Epoch 98/200
25/25 [==============================] - 1s 22ms/step - loss: 0.0358 - accuracy: 0.9898 - val_loss: 0.2817 - val_accuracy: 0.9003
Epoch 99/200
25/25 [==============================] - 1s 24ms/step - loss: 0.0364 - accuracy: 0.9904 - val_loss: 0.2161 - val_accuracy: 0.9105
Epoch 100/200
25/25 [==============================] - 1s 21ms/step - loss: 0.0272 - accuracy: 0.9936 - val_loss: 0.2180 - val_accuracy: 0.9105
Epoch 101/200
25/25 [==============================] - 1s 21ms/step - loss: 0.0398 - accuracy: 0.9853 - val_loss: 0.2126 - val_accuracy: 0.9079
Epoch 102/200
25/25 [==============================] - 1s 21ms/step - loss: 0.0343 - accuracy: 0.9891 - val_loss: 0.2539 - val_accuracy: 0.9028
Epoch 103/200
25/25 [==============================] - 1s 21ms/step - loss: 0.0416 - accuracy: 0.9866 - val_loss: 0.2091 - val_accuracy: 0.9105
Epoch 104/200
25/25 [==============================] - 1s 21ms/step - loss: 0.0329 - accuracy: 0.9878 - val_loss: 0.2261 - val_accuracy: 0.9105
Epoch 105/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0243 - accuracy: 0.9936 - val_loss: 0.2284 - val_accuracy: 0.9105
Epoch 106/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0343 - accuracy: 0.9872 - val_loss: 0.2457 - val_accuracy: 0.9054
Epoch 107/200
25/25 [==============================] - 1s 22ms/step - loss: 0.0252 - accuracy: 0.9942 - val_loss: 0.2123 - val_accuracy: 0.9182
Epoch 108/200
25/25 [==============================] - 1s 22ms/step - loss: 0.0277 - accuracy: 0.9917 - val_loss: 0.2392 - val_accuracy: 0.9105
Epoch 109/200
25/25 [==============================] - 1s 24ms/step - loss: 0.0276 - accuracy: 0.9917 - val_loss: 0.2317 - val_accuracy: 0.9130
Epoch 110/200
25/25 [==============================] - 1s 22ms/step - loss: 0.0248 - accuracy: 0.9942 - val_loss: 0.2242 - val_accuracy: 0.9105
Epoch 111/200
25/25 [==============================] - 1s 22ms/step - loss: 0.0265 - accuracy: 0.9930 - val_loss: 0.2266 - val_accuracy: 0.9156
Epoch 112/200
25/25 [==============================] - 1s 21ms/step - loss: 0.0283 - accuracy: 0.9923 - val_loss: 0.2771 - val_accuracy: 0.9028
Epoch 113/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0240 - accuracy: 0.9936 - val_loss: 0.2472 - val_accuracy: 0.9105
Epoch 114/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0290 - accuracy: 0.9910 - val_loss: 0.2542 - val_accuracy: 0.9105
Epoch 115/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0374 - accuracy: 0.9872 - val_loss: 0.2145 - val_accuracy: 0.9156
Epoch 116/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0283 - accuracy: 0.9923 - val_loss: 0.2443 - val_accuracy: 0.9105
Epoch 117/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0227 - accuracy: 0.9949 - val_loss: 0.2392 - val_accuracy: 0.9079
Epoch 118/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0244 - accuracy: 0.9949 - val_loss: 0.2559 - val_accuracy: 0.9105
Epoch 119/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0241 - accuracy: 0.9930 - val_loss: 0.2137 - val_accuracy: 0.9130
Epoch 120/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0212 - accuracy: 0.9968 - val_loss: 0.2639 - val_accuracy: 0.9028
Epoch 121/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0267 - accuracy: 0.9917 - val_loss: 0.2512 - val_accuracy: 0.9054
Epoch 122/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0222 - accuracy: 0.9949 - val_loss: 0.2137 - val_accuracy: 0.9130
Epoch 123/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0263 - accuracy: 0.9923 - val_loss: 0.1876 - val_accuracy: 0.9233
Epoch 124/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0174 - accuracy: 0.9968 - val_loss: 0.2321 - val_accuracy: 0.9130
Epoch 125/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0247 - accuracy: 0.9930 - val_loss: 0.2205 - val_accuracy: 0.9156
Epoch 126/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0170 - accuracy: 0.9974 - val_loss: 0.2054 - val_accuracy: 0.9156
Epoch 127/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0158 - accuracy: 0.9981 - val_loss: 0.2169 - val_accuracy: 0.9182
Epoch 128/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0245 - accuracy: 0.9923 - val_loss: 0.2415 - val_accuracy: 0.9105
Epoch 129/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0241 - accuracy: 0.9930 - val_loss: 0.2949 - val_accuracy: 0.9003
Epoch 130/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0187 - accuracy: 0.9962 - val_loss: 0.2061 - val_accuracy: 0.9182
Epoch 131/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0239 - accuracy: 0.9917 - val_loss: 0.2088 - val_accuracy: 0.9156
Epoch 132/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0248 - accuracy: 0.9942 - val_loss: 0.2229 - val_accuracy: 0.9105
Epoch 133/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0214 - accuracy: 0.9923 - val_loss: 0.2331 - val_accuracy: 0.9079
Epoch 134/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0204 - accuracy: 0.9962 - val_loss: 0.2422 - val_accuracy: 0.9079
Epoch 135/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0207 - accuracy: 0.9942 - val_loss: 0.2483 - val_accuracy: 0.9105
Epoch 136/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0200 - accuracy: 0.9955 - val_loss: 0.2466 - val_accuracy: 0.9079
Epoch 137/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0207 - accuracy: 0.9942 - val_loss: 0.2148 - val_accuracy: 0.9182
Epoch 138/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0214 - accuracy: 0.9942 - val_loss: 0.2268 - val_accuracy: 0.9156
Epoch 139/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0155 - accuracy: 0.9962 - val_loss: 0.2667 - val_accuracy: 0.9028
Epoch 140/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0150 - accuracy: 0.9974 - val_loss: 0.2193 - val_accuracy: 0.9156
Epoch 141/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0141 - accuracy: 0.9974 - val_loss: 0.2151 - val_accuracy: 0.9156
Epoch 142/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0140 - accuracy: 0.9974 - val_loss: 0.2269 - val_accuracy: 0.9156
Epoch 143/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0213 - accuracy: 0.9923 - val_loss: 0.2182 - val_accuracy: 0.9156
Epoch 144/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0123 - accuracy: 0.9968 - val_loss: 0.2269 - val_accuracy: 0.9182
Epoch 145/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0132 - accuracy: 0.9968 - val_loss: 0.1980 - val_accuracy: 0.9258
Epoch 146/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0186 - accuracy: 0.9955 - val_loss: 0.2100 - val_accuracy: 0.9156
Epoch 147/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0176 - accuracy: 0.9962 - val_loss: 0.2178 - val_accuracy: 0.9130
Epoch 148/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0201 - accuracy: 0.9955 - val_loss: 0.3188 - val_accuracy: 0.9003
Epoch 149/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0187 - accuracy: 0.9968 - val_loss: 0.2354 - val_accuracy: 0.9105
Epoch 150/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0178 - accuracy: 0.9968 - val_loss: 0.2513 - val_accuracy: 0.9105
Epoch 151/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0142 - accuracy: 0.9955 - val_loss: 0.2209 - val_accuracy: 0.9156
Epoch 152/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0139 - accuracy: 0.9968 - val_loss: 0.2021 - val_accuracy: 0.9233
Epoch 153/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0141 - accuracy: 0.9962 - val_loss: 0.2111 - val_accuracy: 0.9233
Epoch 154/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0134 - accuracy: 0.9962 - val_loss: 0.2025 - val_accuracy: 0.9207
Epoch 155/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0094 - accuracy: 0.9994 - val_loss: 0.1835 - val_accuracy: 0.9258
Epoch 156/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0125 - accuracy: 0.9968 - val_loss: 0.2072 - val_accuracy: 0.9233
Epoch 157/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0127 - accuracy: 0.9968 - val_loss: 0.2032 - val_accuracy: 0.9233
Epoch 158/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0149 - accuracy: 0.9968 - val_loss: 0.1958 - val_accuracy: 0.9233
Epoch 159/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0143 - accuracy: 0.9962 - val_loss: 0.2279 - val_accuracy: 0.9156
Epoch 160/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0120 - accuracy: 0.9981 - val_loss: 0.2207 - val_accuracy: 0.9182
Epoch 161/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0121 - accuracy: 0.9981 - val_loss: 0.2151 - val_accuracy: 0.9182
Epoch 162/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0117 - accuracy: 0.9968 - val_loss: 0.2462 - val_accuracy: 0.9105
Epoch 163/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0114 - accuracy: 0.9981 - val_loss: 0.1876 - val_accuracy: 0.9258
Epoch 164/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0108 - accuracy: 0.9981 - val_loss: 0.1967 - val_accuracy: 0.9258
Epoch 165/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0112 - accuracy: 0.9981 - val_loss: 0.2111 - val_accuracy: 0.9233
Epoch 166/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0102 - accuracy: 0.9994 - val_loss: 0.1902 - val_accuracy: 0.9284
Epoch 167/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0167 - accuracy: 0.9968 - val_loss: 0.1851 - val_accuracy: 0.9258
Epoch 168/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0128 - accuracy: 0.9962 - val_loss: 0.2213 - val_accuracy: 0.9233
Epoch 169/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0109 - accuracy: 0.9981 - val_loss: 0.2903 - val_accuracy: 0.9028
Epoch 170/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0111 - accuracy: 0.9974 - val_loss: 0.2402 - val_accuracy: 0.9105
Epoch 171/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0161 - accuracy: 0.9949 - val_loss: 0.2060 - val_accuracy: 0.9207
Epoch 172/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0114 - accuracy: 0.9974 - val_loss: 0.2200 - val_accuracy: 0.9182
Epoch 173/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0113 - accuracy: 0.9994 - val_loss: 0.2237 - val_accuracy: 0.9182
Epoch 174/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0127 - accuracy: 0.9968 - val_loss: 0.2500 - val_accuracy: 0.9079
Epoch 175/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0096 - accuracy: 0.9981 - val_loss: 0.2170 - val_accuracy: 0.9233
Epoch 176/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0097 - accuracy: 0.9987 - val_loss: 0.2057 - val_accuracy: 0.9284
Epoch 177/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0079 - accuracy: 0.9994 - val_loss: 0.2506 - val_accuracy: 0.9079
Epoch 178/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0098 - accuracy: 0.9974 - val_loss: 0.2317 - val_accuracy: 0.9207
Epoch 179/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0110 - accuracy: 0.9987 - val_loss: 0.2224 - val_accuracy: 0.9233
Epoch 180/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0100 - accuracy: 0.9994 - val_loss: 0.2070 - val_accuracy: 0.9284
Epoch 181/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0097 - accuracy: 0.9974 - val_loss: 0.2412 - val_accuracy: 0.9182
Epoch 182/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0115 - accuracy: 0.9942 - val_loss: 0.2618 - val_accuracy: 0.9079
Epoch 183/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0172 - accuracy: 0.9942 - val_loss: 0.2077 - val_accuracy: 0.9233
Epoch 184/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0106 - accuracy: 0.9962 - val_loss: 0.2246 - val_accuracy: 0.9258
Epoch 185/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0191 - accuracy: 0.9942 - val_loss: 0.1825 - val_accuracy: 0.9284
Epoch 186/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0091 - accuracy: 0.9994 - val_loss: 0.2279 - val_accuracy: 0.9233
Epoch 187/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0094 - accuracy: 0.9987 - val_loss: 0.2784 - val_accuracy: 0.9079
Epoch 188/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0114 - accuracy: 0.9974 - val_loss: 0.2485 - val_accuracy: 0.9156
Epoch 189/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0086 - accuracy: 0.9981 - val_loss: 0.2095 - val_accuracy: 0.9258
Epoch 190/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0083 - accuracy: 0.9994 - val_loss: 0.2127 - val_accuracy: 0.9233
Epoch 191/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0095 - accuracy: 0.9981 - val_loss: 0.2281 - val_accuracy: 0.9233
Epoch 192/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0080 - accuracy: 0.9994 - val_loss: 0.2699 - val_accuracy: 0.9054
Epoch 193/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0084 - accuracy: 0.9987 - val_loss: 0.2490 - val_accuracy: 0.9130
Epoch 194/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0062 - accuracy: 0.9987 - val_loss: 0.2378 - val_accuracy: 0.9182
Epoch 195/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0101 - accuracy: 0.9974 - val_loss: 0.2552 - val_accuracy: 0.9105
Epoch 196/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0082 - accuracy: 0.9981 - val_loss: 0.2523 - val_accuracy: 0.9156
Epoch 197/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0069 - accuracy: 0.9994 - val_loss: 0.2437 - val_accuracy: 0.9156
Epoch 198/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0102 - accuracy: 0.9974 - val_loss: 0.2234 - val_accuracy: 0.9258
Epoch 199/200
25/25 [==============================] - 0s 20ms/step - loss: 0.0066 - accuracy: 0.9994 - val_loss: 0.2790 - val_accuracy: 0.9079
Epoch 200/200
25/25 [==============================] - 1s 20ms/step - loss: 0.0072 - accuracy: 0.9987 - val_loss: 0.2198 - val_accuracy: 0.9258

After 200 training epochs, we will plot the accuracy and loss curves:

In [ ]:
# Accuracy curves: training first, then validation, so the legend
# labels ['train', 'val'] line up with the plotting order.
for series in ('accuracy', 'val_accuracy'):
    plt.plot(history.history[series])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='lower right')
plt.show()
No description has been provided for this image
In [ ]:
# Loss curves: training first, then validation, matching the legend order.
for series in ('loss', 'val_loss'):
    plt.plot(history.history[series])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='upper right')
plt.show()
No description has been provided for this image

We can save both the architecture and the trained weights for later use.

In [ ]:
# Serialize the trained network: the architecture goes to a JSON file,
# the learned weights to a separate HDF5 file, so both can be reloaded
# later without re-running training.
model_json = Model.to_json()
with open("drive/My Drive/Datasets/Car_detection/Model200_new.json", "w") as json_file:
    json_file.write(model_json)
# serialize weights to HDF5
Model.save_weights("drive/My Drive/Datasets/Car_detection/Model200_weights_new.weights.h5")
In [ ]:
from keras.models import model_from_json
In [ ]:
# Rebuild the network from the serialized architecture, then restore the
# trained weights saved in the previous step. Using a `with` block instead
# of manual open/read/close guarantees the file is closed even if the
# read raises.
with open('drive/My Drive/Datasets/Car_detection/Model200_new.json', 'r') as json_file:
    loaded_model_json = json_file.read()
loaded_model = model_from_json(loaded_model_json)
# load weights into new model
loaded_model.load_weights("drive/My Drive/Datasets/Car_detection/Model200_weights_new.weights.h5")

Now let's apply our model to an entire image. To make this possible, we will define helper functions that slide a window across the image in patches of the same size used for training, and that resize the image while preserving its aspect ratio.

In [ ]:
def resize(image, width=None, height=None, inter=cv2.INTER_AREA):
	"""Resize `image` to the given width OR height, preserving aspect ratio.

	If both target dimensions are None the image is returned untouched.
	When only one is given, the other is derived from the original
	aspect ratio; `width` wins if both are supplied.
	"""
	(orig_h, orig_w) = image.shape[:2]
	if width is None and height is None:
		return image
	if width is not None:
		# Width-driven resize: scale height by the same factor.
		ratio = width / float(orig_w)
		target = (width, int(orig_h * ratio))
	else:
		# Height-driven resize: scale width by the same factor.
		ratio = height / float(orig_h)
		target = (int(orig_w * ratio), height)
	return cv2.resize(image, target, interpolation=inter)
In [ ]:
def pyramid(image, scale=1.5, minSize=(30, 30)):
	"""Yield progressively smaller copies of `image`.

	The original image is yielded first; each following level shrinks
	the previous one by `scale` (width-driven, aspect preserved).
	Iteration stops once either dimension drops below `minSize`
	(given as (min_width, min_height)).
	"""
	yield image
	while True:
		next_width = int(image.shape[1] / scale)
		image = resize(image, width=next_width)
		too_short = image.shape[0] < minSize[1]
		too_narrow = image.shape[1] < minSize[0]
		if too_short or too_narrow:
			break
		yield image

def sliding_window(image, stepSize, windowSize):
	"""Scan `image` in steps of `stepSize`, yielding (x, y, crop) tuples.

	Windows run left-to-right, top-to-bottom. `windowSize` is
	(width, height); crops near the right/bottom edges may be smaller
	than requested because slicing clips at the image border.
	"""
	win_w, win_h = windowSize
	for top in range(0, image.shape[0], stepSize):
		for left in range(0, image.shape[1], stepSize):
			crop = image[top:top + win_h, left:left + win_w]
			yield (left, top, crop)

We can import our test image:

In [ ]:
# GeoTIFF the trained model will be applied to.
test_path = 'drive/My Drive/Datasets/Car_detection/Example_cars_detect.tif'
In [ ]:
# Open the raster and read all bands as a (bands, rows, cols) array.
src_test = rasterio.open(test_path)
im_test = src_test.read()
In [ ]:
# Reorder to (rows, cols, bands) — the layout matplotlib and OpenCV expect.
im_test = im_test.transpose([1,2,0])
In [ ]:
# Cast to 8-bit so the image displays correctly and matches the /255
# scaling applied before prediction below.
im_test = im_test.astype('uint8')
In [ ]:
# Keep only the first three bands (presumably RGB; any extra band is dropped).
image = im_test[:,:,0:3]

Let's set the parameters and apply the sliding window to our image. If the model detects cars in the window, the boundary of that window will be added to the list of bboxes:

In [ ]:
# Image-pyramid parameters: scale=1 keeps the original resolution, and a
# minSize larger than the image stops the pyramid after one level.
scale = 1
minSize = (4000, 4000)
# Sliding window parameters
stepSize = 20
(winW, winH) = (80, 80)
In [ ]:
# Placeholder first row of zeros so np.vstack always has a 4-vector to
# stack onto; it is deleted after the loop.
bboxes = np.zeros(4,np.int64)
for i, resized in enumerate(pyramid(image, scale=scale, minSize=minSize)):
  for (x, y, window) in sliding_window(resized, stepSize=stepSize, windowSize=(winW, winH)):
    # Skip partial windows at the right/bottom image edges.
    if (window.shape[0] == winH and window.shape[1] == winW):
      clone = resized.copy()  # NOTE(review): unused — likely left over from a drawing step
      cropped_img = resized[y:y + winH, x:x + winW]
      # Add a batch axis and scale to [0, 1] before prediction.
      cropped_img = cropped_img[np.newaxis,:,:,:]
      cropped_img = cropped_img/255
      predict = loaded_model.predict(cropped_img)
      # Round the predicted score to a hard 0/1 decision.
      predict = np.round(predict).astype(np.uint8)
      y_pred = predict[0][0]


      if y_pred == 1:
        print('detected')
        if i != 0:
          # Map window coords back to the full-resolution image.
          # NOTE(review): the rescale factor should probably be scale**i,
          # not scale*i — harmless here because scale == 1; confirm before
          # using a multi-level pyramid.
          bboxes = np.vstack((bboxes, np.array([
            int(x*scale*i), int(y*scale*i),
            int((x + winW)*scale*i), int((y + winH)*scale*i)])))
        else:
          bboxes = np.vstack((bboxes, np.array([
            int(x),int(y),int(x + winW), int(y + winH)])))
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected
detected

Let's plot the results:

In [ ]:
# Drop the placeholder zero row that seeded the np.vstack accumulation.
bboxes = np.delete(bboxes, (0), axis=0)
In [ ]:
# Overlay every raw detection as a red rectangle on the test image.
plt.figure(figsize=[20,20])
plt.imshow(im_test)
for box in bboxes:
  rect = plt.Rectangle((box[0], box[1]), box[2]-box[0], box[3]-box[1], edgecolor='r', facecolor='none')
  plt.gca().add_patch(rect)
No description has been provided for this image

We saw that we have regions with several bboxes close to each other. Let's use non-maximum suppression to remove inaccurate detections from the same region:

In [ ]:
def non_max_suppression_fast(boxes, overlapThresh):
	'''
	Greedy non-maximum suppression over axis-aligned bounding boxes.

	Boxes are visited from largest y2 downward; each kept box suppresses
	every remaining candidate whose overlap ratio (intersection area
	divided by the candidate's own area — not IoU) exceeds the threshold.

	Args:
		boxes(numpy.array): bounding boxes of shape=(None,4) as x1,y1,x2,y2.
		overlapThresh(float): suppression threshold for the overlap ratio.
	Returns:
		boxes(numpy.array): surviving boxes of shape=(None,4), int dtype.
	'''
	if len(boxes) == 0:
		return []

	# Work in floats so the area division below is exact.
	if boxes.dtype.kind == "i":
		boxes = boxes.astype("float")

	kept = []

	left = boxes[:,0]
	top = boxes[:,1]
	right = boxes[:,2]
	bottom = boxes[:,3]

	# The +1 treats coordinates as inclusive pixel indices.
	areas = (right - left + 1) * (bottom - top + 1)
	order = np.argsort(bottom)

	while len(order) > 0:
		# Take the box with the largest y2 still in play.
		tail = len(order) - 1
		current = order[tail]
		kept.append(current)

		# Intersection of the current box with every remaining candidate.
		ix1 = np.maximum(left[current], left[order[:tail]])
		iy1 = np.maximum(top[current], top[order[:tail]])
		ix2 = np.minimum(right[current], right[order[:tail]])
		iy2 = np.minimum(bottom[current], bottom[order[:tail]])

		inter_w = np.maximum(0, ix2 - ix1 + 1)
		inter_h = np.maximum(0, iy2 - iy1 + 1)

		# Ratio relative to each candidate's own area.
		overlap = (inter_w * inter_h) / areas[order[:tail]]

		# Remove the current box plus every candidate it suppresses.
		suppressed = np.where(overlap > overlapThresh)[0]
		order = np.delete(order, np.concatenate(([tail], suppressed)))

	return boxes[kept].astype("int")
In [ ]:
# Collapse clusters of overlapping detections to (at most) one box each.
nms_bboxes = non_max_suppression_fast(bboxes, 0.3)
In [ ]:
# Plot the suppressed detections over the image.
plt.figure(figsize=[20,20])
plt.imshow(im_test)
for box in nms_bboxes:
  rect = plt.Rectangle((box[0], box[1]), box[2]-box[0], box[3]-box[1], edgecolor='r', facecolor='none')
  plt.gca().add_patch(rect)
No description has been provided for this image

For each detection found, we will obtain the centroid and convert it to latitude and longitude values:

In [ ]:
# Convert each box centre from pixel coordinates to the raster's CRS.
ls_x = []
ls_y = []
for i in nms_bboxes:
  # Pixel-space centroid of the (x1, y1, x2, y2) box.
  xcenter = (i[2] - i[0])/2 + i[0]
  ycenter = (i[3] - i[1])/2 + i[1]
  xcenter = np.round(xcenter)
  ycenter = np.round(ycenter)
  # rasterio.transform.xy takes (rows, cols) — hence (ycenter, xcenter).
  xs, ys = rasterio.transform.xy(src_test.transform, ycenter, xcenter)
  ls_x.append(xs)
  ls_y.append(ys)
In [ ]:
# Collect the geo-referenced centres into a DataFrame.
df_xy = pd.DataFrame([])
df_xy['x'] = ls_x
df_xy['y'] = ls_y

Finally, we create a geodataframe of points:

In [ ]:
# Build a point GeoDataFrame from the x/y columns.
gdf = gpd.GeoDataFrame(df_xy, geometry=gpd.points_from_xy(df_xy['x'], df_xy['y']))
In [ ]:
# Tag the points with the source raster's CRS.
# NOTE(review): crs.to_dict()['init'] is a deprecated pyproj idiom;
# passing src_test.crs directly should work — confirm library versions.
gdf = gdf.set_crs(src_test.crs.to_dict()['init'])
In [ ]:
# Draw the RGB bands of the raster with the detected car centres on top.
fig, ax = plt.subplots(figsize=(20, 20))
show((src_test,[1,2,3]), ax=ax)
gdf.plot(ax=ax,  marker='o', color='red', markersize=15)
Out[ ]:
<matplotlib.axes._subplots.AxesSubplot at 0x7f89d4e259d0>
No description has been provided for this image

We save it as a GeoJSON file and download it:

In [ ]:
# Export the detections as GeoJSON, then download to the local machine.
gdf.to_file('/content/cars_locations.json', driver="GeoJSON")
In [ ]:
from google.colab import files
files.download('/content/cars_locations.json')